from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
# Central to TF are tensors: primitive values shaped into an array. Rank is
# the number of dimensions; shape is a tuple of ints giving the array's
# length along each dimension.
# TF uses numpy arrays to represent tensor values.

# The TF core workflow is building a computational graph, then running it in
# a session.
# In the graph, operations are the nodes and tensors are the edges.
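# A quick added illustration of rank and shape (not part of the original
# walkthrough): a vector has rank 1, a matrix rank 2.
rank1 = tf.constant([1.0, 2.0, 3.0])           # rank 1, shape (3,)
rank2 = tf.constant([[1.0, 2.0], [3.0, 4.0]])  # rank 2, shape (2, 2)
print(rank1.shape, rank2.shape)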
a = tf.constant(3.0, dtype=tf.float32)
b = tf.constant(4.0)  # also tf.float32 implicitly
total = a + b
print(total)
# Printing outputs nodes of the computational graph, each with a unique
# name, not values.
# Evaluating a tensor requires creating a tf.Session object.
sess = tf.Session()
print(sess.run(total))
print(sess.run({'ab': (a, b), 'total': total}))
vec = tf.random_uniform(shape=(3,))
out1 = vec + 1
out2 = vec + 2
print(sess.run((out1, out2)))
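# Added illustration: each sess.run resamples the random tensor, so these
# two calls print different vectors, while within the single run above out1
# and out2 share one consistent value of vec.
print(sess.run(vec))
print(sess.run(vec))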
# An ML graph needs to accept external inputs so the same graph can produce
# varying results. Placeholders promise to provide a value later, at run
# time, like a function argument.
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
z = x + y
print(sess.run(z, feed_dict={x: 3, y: 4.5}))
print(sess.run(z, feed_dict={x: [1, 3], y: [2, 4]}))
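# Added note: evaluating z without feeding its placeholders raises an error,
# which is why feed_dict is required above.
try:
    print(sess.run(z))
except tf.errors.InvalidArgumentError:
    print('z requires feed_dict values for x and y')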
# Datasets, however, are the preferred way of streaming data into a model.
my_data = [[0, 1], [2, 3], [4, 5], [6, 7]]
slices = tf.data.Dataset.from_tensor_slices(my_data)
next_item = slices.make_one_shot_iterator().get_next()
# Iterate until the end of the stream raises OutOfRangeError.
while True:
    try:
        print(sess.run(next_item))
    except tf.errors.OutOfRangeError:
        break
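# Added sketch: Dataset transformations compose; for example, batch(2)
# groups the four rows above into two batches of two.
batched = tf.data.Dataset.from_tensor_slices(my_data).batch(2)
next_batch = batched.make_one_shot_iterator().get_next()
while True:
    try:
        print(sess.run(next_batch))
    except tf.errors.OutOfRangeError:
        break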
# If the Dataset depends on stateful operations, the iterator may need to be
# initialized before use.
r = tf.random_normal([10, 3])
dataset = tf.data.Dataset.from_tensor_slices(r)
iterator = dataset.make_initializable_iterator()
next_row = iterator.get_next()

sess.run(iterator.initializer)
while True:
    try:
        print(sess.run(next_row))
    except tf.errors.OutOfRangeError:
        break
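# Added illustration: re-running the initializer restarts the pipeline, and
# because r is a random op the second pass yields fresh rows.
sess.run(iterator.initializer)
print(sess.run(next_row))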
# A trainable model needs values in the graph that can be modified, so the
# same inputs can produce new outputs.
# Layers are used for this; they package variables and operations together.
# A densely-connected layer applies a weighted sum across all of its inputs,
# with an optional activation function on the output.
x = tf.placeholder(tf.float32, shape=[None, 3])
linear_model = tf.layers.Dense(units=1)
y = linear_model(x)

# Initialize the variables the layer created.
init = tf.global_variables_initializer()
sess.run(init)

# Now we can evaluate the linear model's output tensor like any other.
print(sess.run(y, {x: [[1, 2, 3], [4, 5, 6]]}))
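# Added sketch: the Layer object keeps handles to the variables it created,
# so the kernel and bias can be inspected directly.
print(sess.run((linear_model.kernel, linear_model.bias)))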
# Condensed version: tf.layers.dense creates and applies the layer in a
# single call, at the cost of access to the Layer object.
x = tf.placeholder(tf.float32, shape=[None, 3])
y = tf.layers.dense(x, units=1)
init = tf.global_variables_initializer()
sess.run(init)
print(sess.run(y, {x: [[1, 2, 3], [4, 5, 6]]}))
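# Added note: each call to tf.layers.dense creates a new layer with fresh
# variables; keep the Layer object, as above, when weights must be reused.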
# The easiest way to experiment with feature columns is
# tf.feature_column.input_layer, which only accepts dense columns, so a
# categorical column must be wrapped in an indicator_column to view its
# result.
features = {
    'sales': [[5], [10], [8], [9]],
    'department': ['sports', 'sports', 'gardening', 'gardening']}

department_column = tf.feature_column.categorical_column_with_vocabulary_list(
    'department', ['sports', 'gardening'])
department_column = tf.feature_column.indicator_column(department_column)

columns = [
    tf.feature_column.numeric_column('sales'),
    department_column
]

inputs = tf.feature_column.input_layer(features, columns)
# Feature columns can have internal state, like layers, so they require
# initialization. Categorical columns use lookup tables internally, which
# require a separate initialization op: tf.tables_initializer.
var_init = tf.global_variables_initializer()
table_init = tf.tables_initializer()
sess.run((var_init, table_init))

# Once the session has run the initializers, the inputs can be evaluated.
print(sess.run(inputs))
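# Added note (an assumption about column ordering): each printed row should
# hold three values, a two-element one-hot vector for 'department' followed
# by the 'sales' value, since input_layer concatenates columns sorted by
# name.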
# Some arbitrary inputs
x = tf.constant([[1], [2], [3], [4]], dtype=tf.float32, name="C1")
y_true = tf.constant([[0], [-1], [-2], [-3]], dtype=tf.float32, name="C2")

# The model to train, with one output
linear_model = tf.layers.Dense(units=1, name="L1")
y_pred = linear_model(x)

init = tf.global_variables_initializer()
sess.run(init)
print(sess.run(y_pred))

# The loss measures how far the predictions are from the labels.
loss = tf.losses.mean_squared_error(labels=y_true, predictions=y_pred)
print(sess.run(loss))

# Optimizers incrementally change the variables to minimize the loss.
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)

# Each run of the train op nudges the variables, so loop to train.
for i in range(100):
    _, loss_value = sess.run((train, loss))
    print(loss_value)
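# Added check: the data above follows y = 1 - x, so after training the
# learned kernel should be moving toward -1 and the bias toward 1.
print(sess.run((linear_model.kernel, linear_model.bias)))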
# The complete program, repeating the pieces above end to end.
# The training inputs
x = tf.constant([[4], [3], [2], [1]], dtype=tf.float32, name="X")
# The comparison values
y_true = tf.constant([[0], [-1], [-2], [-3]], dtype=tf.float32, name="Y_t")

# A Dense layer that takes a batch of input vectors and produces a single
# output per vector, assigned to y_pred.
linear_model = tf.layers.Dense(units=1, name="Dense_LM")
y_pred = linear_model(x)

# The loss operation compares the y_true labels with the predictions
# dictated by the model; mean squared error judges y_pred's single output.
loss = tf.losses.mean_squared_error(labels=y_true, predictions=y_pred)

# Set up the basic trainer to minimize the loss.
optimizer = tf.train.GradientDescentOptimizer(0.01, name="gdo")
train = optimizer.minimize(loss)

init = tf.global_variables_initializer()
sess.run(init)

# Train n times; each run returns a blank (from the train op) and the loss
# value, so put both the train op and the loss tensor into the run call.
for i in range(10000):
    _, loss_value = sess.run((train, loss))

# Run the inputs through the trained prediction model.
print(sess.run(y_pred))
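# Added check: print the final loss to confirm that training converged.
print(sess.run(loss))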
# TensorBoard is a way to visualize the computation graph.
writer = tf.summary.FileWriter('.')
writer.add_graph(tf.get_default_graph())
# Go into the directory and type `tensorboard --logdir .` to view your graph